!pip install scipy==1.11
Requirement already satisfied: scipy==1.11 in /opt/conda/lib/python3.10/site-packages (1.11.0) Requirement already satisfied: numpy<1.28.0,>=1.21.6 in /opt/conda/lib/python3.10/site-packages (from scipy==1.11) (1.23.5)
!pip install tensorflow-io
Requirement already satisfied: tensorflow-io in /opt/conda/lib/python3.10/site-packages (0.31.0) Requirement already satisfied: tensorflow-io-gcs-filesystem==0.31.0 in /opt/conda/lib/python3.10/site-packages (from tensorflow-io) (0.31.0)
# Importing Libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications.inception_v3 import InceptionV3
from tensorflow.keras.layers import Dense, Dropout, BatchNormalization
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.preprocessing import image
# Root folder of the ASL digits dataset (one sub-folder per class, "0".."9").
data_dir = '/kaggle/input/asl-digits-0-9/ASL Digits/asl_dataset_digits'
# Raw tf.data dataset; used below for class names and image previews only
# (training uses the ImageDataGenerator pipelines defined later).
data = tf.keras.preprocessing.image_dataset_from_directory(data_dir)
Found 570 files belonging to 10 classes.
# Filtering out corrupted images: JPEG/JFIF files carry the ASCII marker
# "JFIF" near the start; files without it are treated as corrupted and removed.
import os

num_skipped = 0
for folder_name in ("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"):
    folder_path = os.path.join(data_dir, folder_name)
    for fname in os.listdir(folder_path):
        fpath = os.path.join(folder_path, fname)
        # `with` guarantees the handle is closed even if peek() raises.
        # The original open()/try/finally pattern would hit a NameError in
        # the finally clause if open() itself failed, because `fobj` would
        # never have been bound.
        with open(fpath, "rb") as fobj:
            is_jfif = tf.compat.as_bytes("JFIF") in fobj.peek(10)
        if not is_jfif:
            num_skipped += 1
            # Delete corrupted image
            os.remove(fpath)

print("Deleted %d images" % num_skipped)
Deleted 0 images
# Create an ImageDataGenerator and do Image Augmentation.
# NOTE: the original used this single augmenting generator (with
# validation_split) for BOTH subsets, so the validation images were also
# randomly rotated/shifted/flipped. Validation should only be rescaled, so a
# second, rescale-only generator is used for it below; both carry the same
# validation_split, so the train/validation file subsets stay disjoint.
datagen = ImageDataGenerator(
    rescale = 1./255,
    rotation_range = 40,
    width_shift_range = 0.2,
    height_shift_range = 0.2,
    shear_range = 0.2,
    zoom_range = 0.2,
    horizontal_flip = True,   # NOTE(review): mirrored hands may alter ASL signs -- confirm this is safe for this dataset
    fill_mode = 'nearest',
    validation_split = 0.2)

# Validation pipeline: rescaling only, no augmentation.
val_datagen = ImageDataGenerator(rescale = 1./255, validation_split = 0.2)

# Input geometry for the network and the data pipelines.
# NOTE(review): ImageNet InceptionV3 was trained at 299x299; 228x228 works
# (any size >= 75 is accepted) but is unusual -- confirm it is not a typo for 224.
height = 228
width = 228
channels = 3
batch_size = 32
img_shape = (height, width, channels)
img_size = (height, width)

train_data = datagen.flow_from_directory(
    data_dir,
    target_size = img_size,
    batch_size = batch_size,
    class_mode = 'categorical',
    subset = 'training')

val_data = val_datagen.flow_from_directory(
    data_dir,
    target_size = img_size,
    batch_size = batch_size,
    class_mode = 'categorical',
    subset = 'validation')
Found 460 images belonging to 10 classes. Found 110 images belonging to 10 classes.
# Number of target classes, inferred from the dataset's directory structure.
num_classes = len(data.class_names)
print('.... Number of Classes : {0} ....'.format(num_classes))
.... Number of Classes : 10 ....
# Defining a helper to preview a 3x3 grid of sample images from a tf.data dataset.
def show_img(data):
    plt.figure(figsize=(15, 15))
    # Pull a single batch and render its first nine images, axes hidden.
    for images, labels in data.take(1):
        for idx in range(9):
            axis = plt.subplot(3, 3, idx + 1)
            axis.imshow(images[idx].numpy().astype("uint8"))
            axis.axis("off")

# Plotting the images in the dataset
show_img(data)
import matplotlib.pyplot as plt

class ImageShowerWithLabels:
    """Display a labelled 3x3 grid of images from one batch of a tf.data dataset."""

    def show_img(self, data):
        plt.figure(figsize=(15, 15))
        # One batch is enough for a preview grid.
        for images, labels in data.take(1):
            for idx in range(9):
                axis = plt.subplot(3, 3, idx + 1)
                axis.imshow(images[idx].numpy().astype("uint8"))
                axis.set_title(f"Label: {labels[idx].numpy()}")
                axis.axis("off")
class DatasetPlotterWithLabels:
    """Hold a tf.data dataset and plot a labelled 3x3 sample grid on demand."""

    def __init__(self, data):
        # Dataset is stored so plotting can be triggered later without arguments.
        self.data = data

    def plot_images(self):
        plt.figure(figsize=(15, 15))
        # Render the first nine images of a single batch with their labels.
        for images, labels in self.data.take(1):
            for idx in range(9):
                axis = plt.subplot(3, 3, idx + 1)
                axis.imshow(images[idx].numpy().astype("uint8"))
                axis.set_title(f"Label: {labels[idx].numpy()}")
                axis.axis("off")

# Usage
# Create an instance of ImageShowerWithLabels
image_shower = ImageShowerWithLabels()
image_shower.show_img(data)
# Create an instance of DatasetPlotterWithLabels and pass the data
dataset_plotter = DatasetPlotterWithLabels(data)
dataset_plotter.plot_images()
# Load pre-trained InceptionV3 as a frozen feature extractor.
# include_top=False drops the ImageNet classifier head; pooling='avg' appends
# global average pooling so the backbone emits a flat feature vector.
# NOTE(review): the data pipeline rescales inputs to [0, 1], while Keras'
# inception_v3.preprocess_input maps to [-1, 1] -- confirm which preprocessing
# these ImageNet weights were intended to receive.
pre_trained = InceptionV3(weights='imagenet', include_top=False, input_shape=img_shape, pooling='avg')

# Freeze every backbone layer; only the new head below is trained.
for layer in pre_trained.layers:
    layer.trainable = False

# New classification head: BN -> Dense(1024, relu) -> Dropout(0.2) -> softmax.
features = pre_trained.output
features = BatchNormalization()(features)
features = Dense(1024, activation='relu')(features)
features = Dropout(0.2)(features)
predictions = Dense(num_classes, activation='softmax')(features)

model = Model(inputs=pre_trained.input, outputs=predictions)
model.compile(optimizer=Adam(learning_rate=0.001), loss='categorical_crossentropy', metrics=['accuracy'])
model.summary()
Model: "model_1"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_2 (InputLayer) [(None, 228, 228, 3 0 []
)]
conv2d_94 (Conv2D) (None, 113, 113, 32 864 ['input_2[0][0]']
)
batch_normalization_95 (BatchN (None, 113, 113, 32 96 ['conv2d_94[0][0]']
ormalization) )
activation_94 (Activation) (None, 113, 113, 32 0 ['batch_normalization_95[0][0]']
)
conv2d_95 (Conv2D) (None, 111, 111, 32 9216 ['activation_94[0][0]']
)
batch_normalization_96 (BatchN (None, 111, 111, 32 96 ['conv2d_95[0][0]']
ormalization) )
activation_95 (Activation) (None, 111, 111, 32 0 ['batch_normalization_96[0][0]']
)
conv2d_96 (Conv2D) (None, 111, 111, 64 18432 ['activation_95[0][0]']
)
batch_normalization_97 (BatchN (None, 111, 111, 64 192 ['conv2d_96[0][0]']
ormalization) )
activation_96 (Activation) (None, 111, 111, 64 0 ['batch_normalization_97[0][0]']
)
max_pooling2d_4 (MaxPooling2D) (None, 55, 55, 64) 0 ['activation_96[0][0]']
conv2d_97 (Conv2D) (None, 55, 55, 80) 5120 ['max_pooling2d_4[0][0]']
batch_normalization_98 (BatchN (None, 55, 55, 80) 240 ['conv2d_97[0][0]']
ormalization)
activation_97 (Activation) (None, 55, 55, 80) 0 ['batch_normalization_98[0][0]']
conv2d_98 (Conv2D) (None, 53, 53, 192) 138240 ['activation_97[0][0]']
batch_normalization_99 (BatchN (None, 53, 53, 192) 576 ['conv2d_98[0][0]']
ormalization)
activation_98 (Activation) (None, 53, 53, 192) 0 ['batch_normalization_99[0][0]']
max_pooling2d_5 (MaxPooling2D) (None, 26, 26, 192) 0 ['activation_98[0][0]']
conv2d_102 (Conv2D) (None, 26, 26, 64) 12288 ['max_pooling2d_5[0][0]']
batch_normalization_103 (Batch (None, 26, 26, 64) 192 ['conv2d_102[0][0]']
Normalization)
activation_102 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_103[0][0]']
conv2d_100 (Conv2D) (None, 26, 26, 48) 9216 ['max_pooling2d_5[0][0]']
conv2d_103 (Conv2D) (None, 26, 26, 96) 55296 ['activation_102[0][0]']
batch_normalization_101 (Batch (None, 26, 26, 48) 144 ['conv2d_100[0][0]']
Normalization)
batch_normalization_104 (Batch (None, 26, 26, 96) 288 ['conv2d_103[0][0]']
Normalization)
activation_100 (Activation) (None, 26, 26, 48) 0 ['batch_normalization_101[0][0]']
activation_103 (Activation) (None, 26, 26, 96) 0 ['batch_normalization_104[0][0]']
average_pooling2d_9 (AveragePo (None, 26, 26, 192) 0 ['max_pooling2d_5[0][0]']
oling2D)
conv2d_99 (Conv2D) (None, 26, 26, 64) 12288 ['max_pooling2d_5[0][0]']
conv2d_101 (Conv2D) (None, 26, 26, 64) 76800 ['activation_100[0][0]']
conv2d_104 (Conv2D) (None, 26, 26, 96) 82944 ['activation_103[0][0]']
conv2d_105 (Conv2D) (None, 26, 26, 32) 6144 ['average_pooling2d_9[0][0]']
batch_normalization_100 (Batch (None, 26, 26, 64) 192 ['conv2d_99[0][0]']
Normalization)
batch_normalization_102 (Batch (None, 26, 26, 64) 192 ['conv2d_101[0][0]']
Normalization)
batch_normalization_105 (Batch (None, 26, 26, 96) 288 ['conv2d_104[0][0]']
Normalization)
batch_normalization_106 (Batch (None, 26, 26, 32) 96 ['conv2d_105[0][0]']
Normalization)
activation_99 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_100[0][0]']
activation_101 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_102[0][0]']
activation_104 (Activation) (None, 26, 26, 96) 0 ['batch_normalization_105[0][0]']
activation_105 (Activation) (None, 26, 26, 32) 0 ['batch_normalization_106[0][0]']
mixed0 (Concatenate) (None, 26, 26, 256) 0 ['activation_99[0][0]',
'activation_101[0][0]',
'activation_104[0][0]',
'activation_105[0][0]']
conv2d_109 (Conv2D) (None, 26, 26, 64) 16384 ['mixed0[0][0]']
batch_normalization_110 (Batch (None, 26, 26, 64) 192 ['conv2d_109[0][0]']
Normalization)
activation_109 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_110[0][0]']
conv2d_107 (Conv2D) (None, 26, 26, 48) 12288 ['mixed0[0][0]']
conv2d_110 (Conv2D) (None, 26, 26, 96) 55296 ['activation_109[0][0]']
batch_normalization_108 (Batch (None, 26, 26, 48) 144 ['conv2d_107[0][0]']
Normalization)
batch_normalization_111 (Batch (None, 26, 26, 96) 288 ['conv2d_110[0][0]']
Normalization)
activation_107 (Activation) (None, 26, 26, 48) 0 ['batch_normalization_108[0][0]']
activation_110 (Activation) (None, 26, 26, 96) 0 ['batch_normalization_111[0][0]']
average_pooling2d_10 (AverageP (None, 26, 26, 256) 0 ['mixed0[0][0]']
ooling2D)
conv2d_106 (Conv2D) (None, 26, 26, 64) 16384 ['mixed0[0][0]']
conv2d_108 (Conv2D) (None, 26, 26, 64) 76800 ['activation_107[0][0]']
conv2d_111 (Conv2D) (None, 26, 26, 96) 82944 ['activation_110[0][0]']
conv2d_112 (Conv2D) (None, 26, 26, 64) 16384 ['average_pooling2d_10[0][0]']
batch_normalization_107 (Batch (None, 26, 26, 64) 192 ['conv2d_106[0][0]']
Normalization)
batch_normalization_109 (Batch (None, 26, 26, 64) 192 ['conv2d_108[0][0]']
Normalization)
batch_normalization_112 (Batch (None, 26, 26, 96) 288 ['conv2d_111[0][0]']
Normalization)
batch_normalization_113 (Batch (None, 26, 26, 64) 192 ['conv2d_112[0][0]']
Normalization)
activation_106 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_107[0][0]']
activation_108 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_109[0][0]']
activation_111 (Activation) (None, 26, 26, 96) 0 ['batch_normalization_112[0][0]']
activation_112 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_113[0][0]']
mixed1 (Concatenate) (None, 26, 26, 288) 0 ['activation_106[0][0]',
'activation_108[0][0]',
'activation_111[0][0]',
'activation_112[0][0]']
conv2d_116 (Conv2D) (None, 26, 26, 64) 18432 ['mixed1[0][0]']
batch_normalization_117 (Batch (None, 26, 26, 64) 192 ['conv2d_116[0][0]']
Normalization)
activation_116 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_117[0][0]']
conv2d_114 (Conv2D) (None, 26, 26, 48) 13824 ['mixed1[0][0]']
conv2d_117 (Conv2D) (None, 26, 26, 96) 55296 ['activation_116[0][0]']
batch_normalization_115 (Batch (None, 26, 26, 48) 144 ['conv2d_114[0][0]']
Normalization)
batch_normalization_118 (Batch (None, 26, 26, 96) 288 ['conv2d_117[0][0]']
Normalization)
activation_114 (Activation) (None, 26, 26, 48) 0 ['batch_normalization_115[0][0]']
activation_117 (Activation) (None, 26, 26, 96) 0 ['batch_normalization_118[0][0]']
average_pooling2d_11 (AverageP (None, 26, 26, 288) 0 ['mixed1[0][0]']
ooling2D)
conv2d_113 (Conv2D) (None, 26, 26, 64) 18432 ['mixed1[0][0]']
conv2d_115 (Conv2D) (None, 26, 26, 64) 76800 ['activation_114[0][0]']
conv2d_118 (Conv2D) (None, 26, 26, 96) 82944 ['activation_117[0][0]']
conv2d_119 (Conv2D) (None, 26, 26, 64) 18432 ['average_pooling2d_11[0][0]']
batch_normalization_114 (Batch (None, 26, 26, 64) 192 ['conv2d_113[0][0]']
Normalization)
batch_normalization_116 (Batch (None, 26, 26, 64) 192 ['conv2d_115[0][0]']
Normalization)
batch_normalization_119 (Batch (None, 26, 26, 96) 288 ['conv2d_118[0][0]']
Normalization)
batch_normalization_120 (Batch (None, 26, 26, 64) 192 ['conv2d_119[0][0]']
Normalization)
activation_113 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_114[0][0]']
activation_115 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_116[0][0]']
activation_118 (Activation) (None, 26, 26, 96) 0 ['batch_normalization_119[0][0]']
activation_119 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_120[0][0]']
mixed2 (Concatenate) (None, 26, 26, 288) 0 ['activation_113[0][0]',
'activation_115[0][0]',
'activation_118[0][0]',
'activation_119[0][0]']
conv2d_121 (Conv2D) (None, 26, 26, 64) 18432 ['mixed2[0][0]']
batch_normalization_122 (Batch (None, 26, 26, 64) 192 ['conv2d_121[0][0]']
Normalization)
activation_121 (Activation) (None, 26, 26, 64) 0 ['batch_normalization_122[0][0]']
conv2d_122 (Conv2D) (None, 26, 26, 96) 55296 ['activation_121[0][0]']
batch_normalization_123 (Batch (None, 26, 26, 96) 288 ['conv2d_122[0][0]']
Normalization)
activation_122 (Activation) (None, 26, 26, 96) 0 ['batch_normalization_123[0][0]']
conv2d_120 (Conv2D) (None, 12, 12, 384) 995328 ['mixed2[0][0]']
conv2d_123 (Conv2D) (None, 12, 12, 96) 82944 ['activation_122[0][0]']
batch_normalization_121 (Batch (None, 12, 12, 384) 1152 ['conv2d_120[0][0]']
Normalization)
batch_normalization_124 (Batch (None, 12, 12, 96) 288 ['conv2d_123[0][0]']
Normalization)
activation_120 (Activation) (None, 12, 12, 384) 0 ['batch_normalization_121[0][0]']
activation_123 (Activation) (None, 12, 12, 96) 0 ['batch_normalization_124[0][0]']
max_pooling2d_6 (MaxPooling2D) (None, 12, 12, 288) 0 ['mixed2[0][0]']
mixed3 (Concatenate) (None, 12, 12, 768) 0 ['activation_120[0][0]',
'activation_123[0][0]',
'max_pooling2d_6[0][0]']
conv2d_128 (Conv2D) (None, 12, 12, 128) 98304 ['mixed3[0][0]']
batch_normalization_129 (Batch (None, 12, 12, 128) 384 ['conv2d_128[0][0]']
Normalization)
activation_128 (Activation) (None, 12, 12, 128) 0 ['batch_normalization_129[0][0]']
conv2d_129 (Conv2D) (None, 12, 12, 128) 114688 ['activation_128[0][0]']
batch_normalization_130 (Batch (None, 12, 12, 128) 384 ['conv2d_129[0][0]']
Normalization)
activation_129 (Activation) (None, 12, 12, 128) 0 ['batch_normalization_130[0][0]']
conv2d_125 (Conv2D) (None, 12, 12, 128) 98304 ['mixed3[0][0]']
conv2d_130 (Conv2D) (None, 12, 12, 128) 114688 ['activation_129[0][0]']
batch_normalization_126 (Batch (None, 12, 12, 128) 384 ['conv2d_125[0][0]']
Normalization)
batch_normalization_131 (Batch (None, 12, 12, 128) 384 ['conv2d_130[0][0]']
Normalization)
activation_125 (Activation) (None, 12, 12, 128) 0 ['batch_normalization_126[0][0]']
activation_130 (Activation) (None, 12, 12, 128) 0 ['batch_normalization_131[0][0]']
conv2d_126 (Conv2D) (None, 12, 12, 128) 114688 ['activation_125[0][0]']
conv2d_131 (Conv2D) (None, 12, 12, 128) 114688 ['activation_130[0][0]']
batch_normalization_127 (Batch (None, 12, 12, 128) 384 ['conv2d_126[0][0]']
Normalization)
batch_normalization_132 (Batch (None, 12, 12, 128) 384 ['conv2d_131[0][0]']
Normalization)
activation_126 (Activation) (None, 12, 12, 128) 0 ['batch_normalization_127[0][0]']
activation_131 (Activation) (None, 12, 12, 128) 0 ['batch_normalization_132[0][0]']
average_pooling2d_12 (AverageP (None, 12, 12, 768) 0 ['mixed3[0][0]']
ooling2D)
conv2d_124 (Conv2D) (None, 12, 12, 192) 147456 ['mixed3[0][0]']
conv2d_127 (Conv2D) (None, 12, 12, 192) 172032 ['activation_126[0][0]']
conv2d_132 (Conv2D) (None, 12, 12, 192) 172032 ['activation_131[0][0]']
conv2d_133 (Conv2D) (None, 12, 12, 192) 147456 ['average_pooling2d_12[0][0]']
batch_normalization_125 (Batch (None, 12, 12, 192) 576 ['conv2d_124[0][0]']
Normalization)
batch_normalization_128 (Batch (None, 12, 12, 192) 576 ['conv2d_127[0][0]']
Normalization)
batch_normalization_133 (Batch (None, 12, 12, 192) 576 ['conv2d_132[0][0]']
Normalization)
batch_normalization_134 (Batch (None, 12, 12, 192) 576 ['conv2d_133[0][0]']
Normalization)
activation_124 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_125[0][0]']
activation_127 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_128[0][0]']
activation_132 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_133[0][0]']
activation_133 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_134[0][0]']
mixed4 (Concatenate) (None, 12, 12, 768) 0 ['activation_124[0][0]',
'activation_127[0][0]',
'activation_132[0][0]',
'activation_133[0][0]']
conv2d_138 (Conv2D) (None, 12, 12, 160) 122880 ['mixed4[0][0]']
batch_normalization_139 (Batch (None, 12, 12, 160) 480 ['conv2d_138[0][0]']
Normalization)
activation_138 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_139[0][0]']
conv2d_139 (Conv2D) (None, 12, 12, 160) 179200 ['activation_138[0][0]']
batch_normalization_140 (Batch (None, 12, 12, 160) 480 ['conv2d_139[0][0]']
Normalization)
activation_139 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_140[0][0]']
conv2d_135 (Conv2D) (None, 12, 12, 160) 122880 ['mixed4[0][0]']
conv2d_140 (Conv2D) (None, 12, 12, 160) 179200 ['activation_139[0][0]']
batch_normalization_136 (Batch (None, 12, 12, 160) 480 ['conv2d_135[0][0]']
Normalization)
batch_normalization_141 (Batch (None, 12, 12, 160) 480 ['conv2d_140[0][0]']
Normalization)
activation_135 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_136[0][0]']
activation_140 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_141[0][0]']
conv2d_136 (Conv2D) (None, 12, 12, 160) 179200 ['activation_135[0][0]']
conv2d_141 (Conv2D) (None, 12, 12, 160) 179200 ['activation_140[0][0]']
batch_normalization_137 (Batch (None, 12, 12, 160) 480 ['conv2d_136[0][0]']
Normalization)
batch_normalization_142 (Batch (None, 12, 12, 160) 480 ['conv2d_141[0][0]']
Normalization)
activation_136 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_137[0][0]']
activation_141 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_142[0][0]']
average_pooling2d_13 (AverageP (None, 12, 12, 768) 0 ['mixed4[0][0]']
ooling2D)
conv2d_134 (Conv2D) (None, 12, 12, 192) 147456 ['mixed4[0][0]']
conv2d_137 (Conv2D) (None, 12, 12, 192) 215040 ['activation_136[0][0]']
conv2d_142 (Conv2D) (None, 12, 12, 192) 215040 ['activation_141[0][0]']
conv2d_143 (Conv2D) (None, 12, 12, 192) 147456 ['average_pooling2d_13[0][0]']
batch_normalization_135 (Batch (None, 12, 12, 192) 576 ['conv2d_134[0][0]']
Normalization)
batch_normalization_138 (Batch (None, 12, 12, 192) 576 ['conv2d_137[0][0]']
Normalization)
batch_normalization_143 (Batch (None, 12, 12, 192) 576 ['conv2d_142[0][0]']
Normalization)
batch_normalization_144 (Batch (None, 12, 12, 192) 576 ['conv2d_143[0][0]']
Normalization)
activation_134 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_135[0][0]']
activation_137 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_138[0][0]']
activation_142 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_143[0][0]']
activation_143 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_144[0][0]']
mixed5 (Concatenate) (None, 12, 12, 768) 0 ['activation_134[0][0]',
'activation_137[0][0]',
'activation_142[0][0]',
'activation_143[0][0]']
conv2d_148 (Conv2D) (None, 12, 12, 160) 122880 ['mixed5[0][0]']
batch_normalization_149 (Batch (None, 12, 12, 160) 480 ['conv2d_148[0][0]']
Normalization)
activation_148 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_149[0][0]']
conv2d_149 (Conv2D) (None, 12, 12, 160) 179200 ['activation_148[0][0]']
batch_normalization_150 (Batch (None, 12, 12, 160) 480 ['conv2d_149[0][0]']
Normalization)
activation_149 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_150[0][0]']
conv2d_145 (Conv2D) (None, 12, 12, 160) 122880 ['mixed5[0][0]']
conv2d_150 (Conv2D) (None, 12, 12, 160) 179200 ['activation_149[0][0]']
batch_normalization_146 (Batch (None, 12, 12, 160) 480 ['conv2d_145[0][0]']
Normalization)
batch_normalization_151 (Batch (None, 12, 12, 160) 480 ['conv2d_150[0][0]']
Normalization)
activation_145 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_146[0][0]']
activation_150 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_151[0][0]']
conv2d_146 (Conv2D) (None, 12, 12, 160) 179200 ['activation_145[0][0]']
conv2d_151 (Conv2D) (None, 12, 12, 160) 179200 ['activation_150[0][0]']
batch_normalization_147 (Batch (None, 12, 12, 160) 480 ['conv2d_146[0][0]']
Normalization)
batch_normalization_152 (Batch (None, 12, 12, 160) 480 ['conv2d_151[0][0]']
Normalization)
activation_146 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_147[0][0]']
activation_151 (Activation) (None, 12, 12, 160) 0 ['batch_normalization_152[0][0]']
average_pooling2d_14 (AverageP (None, 12, 12, 768) 0 ['mixed5[0][0]']
ooling2D)
conv2d_144 (Conv2D) (None, 12, 12, 192) 147456 ['mixed5[0][0]']
conv2d_147 (Conv2D) (None, 12, 12, 192) 215040 ['activation_146[0][0]']
conv2d_152 (Conv2D) (None, 12, 12, 192) 215040 ['activation_151[0][0]']
conv2d_153 (Conv2D) (None, 12, 12, 192) 147456 ['average_pooling2d_14[0][0]']
batch_normalization_145 (Batch (None, 12, 12, 192) 576 ['conv2d_144[0][0]']
Normalization)
batch_normalization_148 (Batch (None, 12, 12, 192) 576 ['conv2d_147[0][0]']
Normalization)
batch_normalization_153 (Batch (None, 12, 12, 192) 576 ['conv2d_152[0][0]']
Normalization)
batch_normalization_154 (Batch (None, 12, 12, 192) 576 ['conv2d_153[0][0]']
Normalization)
activation_144 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_145[0][0]']
activation_147 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_148[0][0]']
activation_152 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_153[0][0]']
activation_153 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_154[0][0]']
mixed6 (Concatenate) (None, 12, 12, 768) 0 ['activation_144[0][0]',
'activation_147[0][0]',
'activation_152[0][0]',
'activation_153[0][0]']
conv2d_158 (Conv2D) (None, 12, 12, 192) 147456 ['mixed6[0][0]']
batch_normalization_159 (Batch (None, 12, 12, 192) 576 ['conv2d_158[0][0]']
Normalization)
activation_158 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_159[0][0]']
conv2d_159 (Conv2D) (None, 12, 12, 192) 258048 ['activation_158[0][0]']
batch_normalization_160 (Batch (None, 12, 12, 192) 576 ['conv2d_159[0][0]']
Normalization)
activation_159 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_160[0][0]']
conv2d_155 (Conv2D) (None, 12, 12, 192) 147456 ['mixed6[0][0]']
conv2d_160 (Conv2D) (None, 12, 12, 192) 258048 ['activation_159[0][0]']
batch_normalization_156 (Batch (None, 12, 12, 192) 576 ['conv2d_155[0][0]']
Normalization)
batch_normalization_161 (Batch (None, 12, 12, 192) 576 ['conv2d_160[0][0]']
Normalization)
activation_155 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_156[0][0]']
activation_160 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_161[0][0]']
conv2d_156 (Conv2D) (None, 12, 12, 192) 258048 ['activation_155[0][0]']
conv2d_161 (Conv2D) (None, 12, 12, 192) 258048 ['activation_160[0][0]']
batch_normalization_157 (Batch (None, 12, 12, 192) 576 ['conv2d_156[0][0]']
Normalization)
batch_normalization_162 (Batch (None, 12, 12, 192) 576 ['conv2d_161[0][0]']
Normalization)
activation_156 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_157[0][0]']
activation_161 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_162[0][0]']
average_pooling2d_15 (AverageP (None, 12, 12, 768) 0 ['mixed6[0][0]']
ooling2D)
conv2d_154 (Conv2D) (None, 12, 12, 192) 147456 ['mixed6[0][0]']
conv2d_157 (Conv2D) (None, 12, 12, 192) 258048 ['activation_156[0][0]']
conv2d_162 (Conv2D) (None, 12, 12, 192) 258048 ['activation_161[0][0]']
conv2d_163 (Conv2D) (None, 12, 12, 192) 147456 ['average_pooling2d_15[0][0]']
batch_normalization_155 (Batch (None, 12, 12, 192) 576 ['conv2d_154[0][0]']
Normalization)
batch_normalization_158 (Batch (None, 12, 12, 192) 576 ['conv2d_157[0][0]']
Normalization)
batch_normalization_163 (Batch (None, 12, 12, 192) 576 ['conv2d_162[0][0]']
Normalization)
batch_normalization_164 (Batch (None, 12, 12, 192) 576 ['conv2d_163[0][0]']
Normalization)
activation_154 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_155[0][0]']
activation_157 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_158[0][0]']
activation_162 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_163[0][0]']
activation_163 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_164[0][0]']
mixed7 (Concatenate) (None, 12, 12, 768) 0 ['activation_154[0][0]',
'activation_157[0][0]',
'activation_162[0][0]',
'activation_163[0][0]']
conv2d_166 (Conv2D) (None, 12, 12, 192) 147456 ['mixed7[0][0]']
batch_normalization_167 (Batch (None, 12, 12, 192) 576 ['conv2d_166[0][0]']
Normalization)
activation_166 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_167[0][0]']
conv2d_167 (Conv2D) (None, 12, 12, 192) 258048 ['activation_166[0][0]']
batch_normalization_168 (Batch (None, 12, 12, 192) 576 ['conv2d_167[0][0]']
Normalization)
activation_167 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_168[0][0]']
conv2d_164 (Conv2D) (None, 12, 12, 192) 147456 ['mixed7[0][0]']
conv2d_168 (Conv2D) (None, 12, 12, 192) 258048 ['activation_167[0][0]']
batch_normalization_165 (Batch (None, 12, 12, 192) 576 ['conv2d_164[0][0]']
Normalization)
batch_normalization_169 (Batch (None, 12, 12, 192) 576 ['conv2d_168[0][0]']
Normalization)
activation_164 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_165[0][0]']
activation_168 (Activation) (None, 12, 12, 192) 0 ['batch_normalization_169[0][0]']
conv2d_165 (Conv2D) (None, 5, 5, 320) 552960 ['activation_164[0][0]']
conv2d_169 (Conv2D) (None, 5, 5, 192) 331776 ['activation_168[0][0]']
batch_normalization_166 (Batch (None, 5, 5, 320) 960 ['conv2d_165[0][0]']
Normalization)
batch_normalization_170 (Batch (None, 5, 5, 192) 576 ['conv2d_169[0][0]']
Normalization)
activation_165 (Activation) (None, 5, 5, 320) 0 ['batch_normalization_166[0][0]']
activation_169 (Activation) (None, 5, 5, 192) 0 ['batch_normalization_170[0][0]']
max_pooling2d_7 (MaxPooling2D) (None, 5, 5, 768) 0 ['mixed7[0][0]']
mixed8 (Concatenate) (None, 5, 5, 1280) 0 ['activation_165[0][0]',
'activation_169[0][0]',
'max_pooling2d_7[0][0]']
conv2d_174 (Conv2D) (None, 5, 5, 448) 573440 ['mixed8[0][0]']
batch_normalization_175 (Batch (None, 5, 5, 448) 1344 ['conv2d_174[0][0]']
Normalization)
activation_174 (Activation) (None, 5, 5, 448) 0 ['batch_normalization_175[0][0]']
conv2d_171 (Conv2D) (None, 5, 5, 384) 491520 ['mixed8[0][0]']
conv2d_175 (Conv2D) (None, 5, 5, 384) 1548288 ['activation_174[0][0]']
batch_normalization_172 (Batch (None, 5, 5, 384) 1152 ['conv2d_171[0][0]']
Normalization)
batch_normalization_176 (Batch (None, 5, 5, 384) 1152 ['conv2d_175[0][0]']
Normalization)
activation_171 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_172[0][0]']
activation_175 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_176[0][0]']
conv2d_172 (Conv2D) (None, 5, 5, 384) 442368 ['activation_171[0][0]']
conv2d_173 (Conv2D) (None, 5, 5, 384) 442368 ['activation_171[0][0]']
conv2d_176 (Conv2D) (None, 5, 5, 384) 442368 ['activation_175[0][0]']
conv2d_177 (Conv2D) (None, 5, 5, 384) 442368 ['activation_175[0][0]']
average_pooling2d_16 (AverageP (None, 5, 5, 1280) 0 ['mixed8[0][0]']
ooling2D)
conv2d_170 (Conv2D) (None, 5, 5, 320) 409600 ['mixed8[0][0]']
batch_normalization_173 (Batch (None, 5, 5, 384) 1152 ['conv2d_172[0][0]']
Normalization)
batch_normalization_174 (Batch (None, 5, 5, 384) 1152 ['conv2d_173[0][0]']
Normalization)
batch_normalization_177 (Batch (None, 5, 5, 384) 1152 ['conv2d_176[0][0]']
Normalization)
batch_normalization_178 (Batch (None, 5, 5, 384) 1152 ['conv2d_177[0][0]']
Normalization)
conv2d_178 (Conv2D) (None, 5, 5, 192) 245760 ['average_pooling2d_16[0][0]']
batch_normalization_171 (Batch (None, 5, 5, 320) 960 ['conv2d_170[0][0]']
Normalization)
activation_172 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_173[0][0]']
activation_173 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_174[0][0]']
activation_176 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_177[0][0]']
activation_177 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_178[0][0]']
batch_normalization_179 (Batch (None, 5, 5, 192) 576 ['conv2d_178[0][0]']
Normalization)
activation_170 (Activation) (None, 5, 5, 320) 0 ['batch_normalization_171[0][0]']
mixed9_0 (Concatenate) (None, 5, 5, 768) 0 ['activation_172[0][0]',
'activation_173[0][0]']
concatenate_2 (Concatenate) (None, 5, 5, 768) 0 ['activation_176[0][0]',
'activation_177[0][0]']
activation_178 (Activation) (None, 5, 5, 192) 0 ['batch_normalization_179[0][0]']
mixed9 (Concatenate) (None, 5, 5, 2048) 0 ['activation_170[0][0]',
'mixed9_0[0][0]',
'concatenate_2[0][0]',
'activation_178[0][0]']
conv2d_183 (Conv2D) (None, 5, 5, 448) 917504 ['mixed9[0][0]']
batch_normalization_184 (Batch (None, 5, 5, 448) 1344 ['conv2d_183[0][0]']
Normalization)
activation_183 (Activation) (None, 5, 5, 448) 0 ['batch_normalization_184[0][0]']
conv2d_180 (Conv2D) (None, 5, 5, 384) 786432 ['mixed9[0][0]']
conv2d_184 (Conv2D) (None, 5, 5, 384) 1548288 ['activation_183[0][0]']
batch_normalization_181 (Batch (None, 5, 5, 384) 1152 ['conv2d_180[0][0]']
Normalization)
batch_normalization_185 (Batch (None, 5, 5, 384) 1152 ['conv2d_184[0][0]']
Normalization)
activation_180 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_181[0][0]']
activation_184 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_185[0][0]']
conv2d_181 (Conv2D) (None, 5, 5, 384) 442368 ['activation_180[0][0]']
conv2d_182 (Conv2D) (None, 5, 5, 384) 442368 ['activation_180[0][0]']
conv2d_185 (Conv2D) (None, 5, 5, 384) 442368 ['activation_184[0][0]']
conv2d_186 (Conv2D) (None, 5, 5, 384) 442368 ['activation_184[0][0]']
average_pooling2d_17 (AverageP (None, 5, 5, 2048) 0 ['mixed9[0][0]']
ooling2D)
conv2d_179 (Conv2D) (None, 5, 5, 320) 655360 ['mixed9[0][0]']
batch_normalization_182 (Batch (None, 5, 5, 384) 1152 ['conv2d_181[0][0]']
Normalization)
batch_normalization_183 (Batch (None, 5, 5, 384) 1152 ['conv2d_182[0][0]']
Normalization)
batch_normalization_186 (Batch (None, 5, 5, 384) 1152 ['conv2d_185[0][0]']
Normalization)
batch_normalization_187 (Batch (None, 5, 5, 384) 1152 ['conv2d_186[0][0]']
Normalization)
conv2d_187 (Conv2D) (None, 5, 5, 192) 393216 ['average_pooling2d_17[0][0]']
batch_normalization_180 (Batch (None, 5, 5, 320) 960 ['conv2d_179[0][0]']
Normalization)
activation_181 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_182[0][0]']
activation_182 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_183[0][0]']
activation_185 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_186[0][0]']
activation_186 (Activation) (None, 5, 5, 384) 0 ['batch_normalization_187[0][0]']
batch_normalization_188 (Batch (None, 5, 5, 192) 576 ['conv2d_187[0][0]']
Normalization)
activation_179 (Activation) (None, 5, 5, 320) 0 ['batch_normalization_180[0][0]']
mixed9_1 (Concatenate) (None, 5, 5, 768) 0 ['activation_181[0][0]',
'activation_182[0][0]']
concatenate_3 (Concatenate) (None, 5, 5, 768) 0 ['activation_185[0][0]',
'activation_186[0][0]']
activation_187 (Activation) (None, 5, 5, 192) 0 ['batch_normalization_188[0][0]']
mixed10 (Concatenate) (None, 5, 5, 2048) 0 ['activation_179[0][0]',
'mixed9_1[0][0]',
'concatenate_3[0][0]',
'activation_187[0][0]']
global_average_pooling2d_1 (Gl (None, 2048) 0 ['mixed10[0][0]']
obalAveragePooling2D)
batch_normalization_189 (Batch (None, 2048) 8192 ['global_average_pooling2d_1[0][0
Normalization) ]']
dense_2 (Dense) (None, 1024) 2098176 ['batch_normalization_189[0][0]']
dropout_1 (Dropout) (None, 1024) 0 ['dense_2[0][0]']
dense_3 (Dense) (None, 10) 10250 ['dropout_1[0][0]']
==================================================================================================
Total params: 23,919,402
Trainable params: 2,112,522
Non-trainable params: 21,806,880
__________________________________________________________________________________________________
# One pass over every full training / validation batch per epoch.
STEP_SIZE_TRAIN = train_data.n // train_data.batch_size
STEP_SIZE_VALID = val_data.n // val_data.batch_size

# Model.fit_generator is deprecated (the run above even emitted the
# UserWarning) and has been removed in newer TF releases; Model.fit accepts
# generators/Sequences directly with the same arguments.
history = model.fit(train_data,
                    steps_per_epoch = STEP_SIZE_TRAIN,
                    validation_data = val_data,
                    validation_steps = STEP_SIZE_VALID,
                    epochs = 50,
                    verbose = 1)
/tmp/ipykernel_28/3850357475.py:4: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators. history = model.fit_generator(train_data,
Epoch 1/50 14/14 [==============================] - 15s 679ms/step - loss: 1.9187 - accuracy: 0.4883 - val_loss: 4.0139 - val_accuracy: 0.2500 Epoch 2/50 14/14 [==============================] - 9s 683ms/step - loss: 1.2194 - accuracy: 0.6869 - val_loss: 2.2539 - val_accuracy: 0.3646 Epoch 3/50 14/14 [==============================] - 9s 616ms/step - loss: 1.1831 - accuracy: 0.7220 - val_loss: 2.6194 - val_accuracy: 0.3542 Epoch 4/50 14/14 [==============================] - 9s 631ms/step - loss: 1.0682 - accuracy: 0.7360 - val_loss: 1.9498 - val_accuracy: 0.4583 Epoch 5/50 14/14 [==============================] - 9s 647ms/step - loss: 1.0325 - accuracy: 0.7383 - val_loss: 1.9896 - val_accuracy: 0.4896 Epoch 6/50 14/14 [==============================] - 9s 618ms/step - loss: 0.9291 - accuracy: 0.7944 - val_loss: 1.8325 - val_accuracy: 0.4896 Epoch 7/50 14/14 [==============================] - 9s 619ms/step - loss: 0.9079 - accuracy: 0.8107 - val_loss: 1.0515 - val_accuracy: 0.6354 Epoch 8/50 14/14 [==============================] - 9s 623ms/step - loss: 0.6851 - accuracy: 0.8084 - val_loss: 0.7871 - val_accuracy: 0.7708 Epoch 9/50 14/14 [==============================] - 8s 571ms/step - loss: 0.7495 - accuracy: 0.8154 - val_loss: 0.8246 - val_accuracy: 0.7708 Epoch 10/50 14/14 [==============================] - 8s 566ms/step - loss: 0.7750 - accuracy: 0.8084 - val_loss: 1.0588 - val_accuracy: 0.6771 Epoch 11/50 14/14 [==============================] - 8s 612ms/step - loss: 0.6539 - accuracy: 0.8178 - val_loss: 1.2017 - val_accuracy: 0.6458 Epoch 12/50 14/14 [==============================] - 9s 623ms/step - loss: 0.6669 - accuracy: 0.8551 - val_loss: 0.6005 - val_accuracy: 0.8021 Epoch 13/50 14/14 [==============================] - 9s 626ms/step - loss: 0.7037 - accuracy: 0.8107 - val_loss: 0.9566 - val_accuracy: 0.7500 Epoch 14/50 14/14 [==============================] - 8s 548ms/step - loss: 0.5626 - accuracy: 0.8481 - val_loss: 0.6878 - val_accuracy: 0.7917 Epoch 
15/50 14/14 [==============================] - 9s 619ms/step - loss: 0.5128 - accuracy: 0.8482 - val_loss: 0.7610 - val_accuracy: 0.7812 Epoch 16/50 14/14 [==============================] - 8s 562ms/step - loss: 0.5965 - accuracy: 0.8458 - val_loss: 0.7079 - val_accuracy: 0.7812 Epoch 17/50 14/14 [==============================] - 8s 594ms/step - loss: 0.5105 - accuracy: 0.8762 - val_loss: 0.6073 - val_accuracy: 0.8438 Epoch 18/50 14/14 [==============================] - 9s 620ms/step - loss: 0.4608 - accuracy: 0.8949 - val_loss: 0.6094 - val_accuracy: 0.8229 Epoch 19/50 14/14 [==============================] - 9s 661ms/step - loss: 0.5460 - accuracy: 0.8762 - val_loss: 0.5127 - val_accuracy: 0.8646 Epoch 20/50 14/14 [==============================] - 8s 563ms/step - loss: 0.5894 - accuracy: 0.8458 - val_loss: 0.8784 - val_accuracy: 0.7604 Epoch 21/50 14/14 [==============================] - 8s 578ms/step - loss: 0.5528 - accuracy: 0.8575 - val_loss: 0.5629 - val_accuracy: 0.8021 Epoch 22/50 14/14 [==============================] - 8s 562ms/step - loss: 0.5240 - accuracy: 0.8692 - val_loss: 0.6130 - val_accuracy: 0.8333 Epoch 23/50 14/14 [==============================] - 9s 681ms/step - loss: 0.6366 - accuracy: 0.8458 - val_loss: 0.5905 - val_accuracy: 0.8333 Epoch 24/50 14/14 [==============================] - 8s 562ms/step - loss: 0.5336 - accuracy: 0.8738 - val_loss: 0.7108 - val_accuracy: 0.8229 Epoch 25/50 14/14 [==============================] - 9s 631ms/step - loss: 0.4208 - accuracy: 0.8879 - val_loss: 0.7312 - val_accuracy: 0.7917 Epoch 26/50 14/14 [==============================] - 8s 557ms/step - loss: 0.3833 - accuracy: 0.8808 - val_loss: 0.6502 - val_accuracy: 0.8021 Epoch 27/50 14/14 [==============================] - 9s 616ms/step - loss: 0.4407 - accuracy: 0.8879 - val_loss: 0.5021 - val_accuracy: 0.8125 Epoch 28/50 14/14 [==============================] - 9s 623ms/step - loss: 0.4053 - accuracy: 0.8925 - val_loss: 0.4924 - val_accuracy: 0.8021 
Epoch 29/50 14/14 [==============================] - 9s 633ms/step - loss: 0.4774 - accuracy: 0.8692 - val_loss: 0.7524 - val_accuracy: 0.7812 Epoch 30/50 14/14 [==============================] - 9s 684ms/step - loss: 0.5895 - accuracy: 0.8668 - val_loss: 0.7754 - val_accuracy: 0.8438 Epoch 31/50 14/14 [==============================] - 8s 562ms/step - loss: 0.3404 - accuracy: 0.8949 - val_loss: 0.6172 - val_accuracy: 0.8854 Epoch 32/50 14/14 [==============================] - 8s 562ms/step - loss: 0.4024 - accuracy: 0.8855 - val_loss: 0.9028 - val_accuracy: 0.8021 Epoch 33/50 14/14 [==============================] - 9s 620ms/step - loss: 0.4222 - accuracy: 0.8972 - val_loss: 0.3839 - val_accuracy: 0.9167 Epoch 34/50 14/14 [==============================] - 9s 617ms/step - loss: 0.4213 - accuracy: 0.8879 - val_loss: 0.5627 - val_accuracy: 0.8750 Epoch 35/50 14/14 [==============================] - 8s 580ms/step - loss: 0.4771 - accuracy: 0.8621 - val_loss: 0.8821 - val_accuracy: 0.8542 Epoch 36/50 14/14 [==============================] - 9s 623ms/step - loss: 0.4736 - accuracy: 0.8762 - val_loss: 0.5562 - val_accuracy: 0.8438 Epoch 37/50 14/14 [==============================] - 9s 631ms/step - loss: 0.4584 - accuracy: 0.8785 - val_loss: 0.6594 - val_accuracy: 0.8333 Epoch 38/50 14/14 [==============================] - 8s 568ms/step - loss: 0.4117 - accuracy: 0.8995 - val_loss: 0.5781 - val_accuracy: 0.8438 Epoch 39/50 14/14 [==============================] - 9s 644ms/step - loss: 0.3550 - accuracy: 0.9040 - val_loss: 0.3654 - val_accuracy: 0.8854 Epoch 40/50 14/14 [==============================] - 8s 567ms/step - loss: 0.3403 - accuracy: 0.9206 - val_loss: 0.7361 - val_accuracy: 0.8333 Epoch 41/50 14/14 [==============================] - 8s 612ms/step - loss: 0.2800 - accuracy: 0.9252 - val_loss: 0.4694 - val_accuracy: 0.8438 Epoch 42/50 14/14 [==============================] - 8s 571ms/step - loss: 0.4275 - accuracy: 0.8949 - val_loss: 0.8316 - val_accuracy: 
0.8646 Epoch 43/50 14/14 [==============================] - 8s 577ms/step - loss: 0.4300 - accuracy: 0.8668 - val_loss: 0.6342 - val_accuracy: 0.8438 Epoch 44/50 14/14 [==============================] - 9s 635ms/step - loss: 0.4646 - accuracy: 0.8879 - val_loss: 0.9816 - val_accuracy: 0.8021 Epoch 45/50 14/14 [==============================] - 9s 650ms/step - loss: 0.5464 - accuracy: 0.8738 - val_loss: 0.4421 - val_accuracy: 0.8854 Epoch 46/50 14/14 [==============================] - 9s 622ms/step - loss: 0.2884 - accuracy: 0.9182 - val_loss: 0.9180 - val_accuracy: 0.8125 Epoch 47/50 14/14 [==============================] - 9s 627ms/step - loss: 0.4681 - accuracy: 0.8692 - val_loss: 0.5182 - val_accuracy: 0.8750 Epoch 48/50 14/14 [==============================] - 9s 697ms/step - loss: 0.2947 - accuracy: 0.9206 - val_loss: 1.1722 - val_accuracy: 0.7292 Epoch 49/50 14/14 [==============================] - 9s 616ms/step - loss: 0.3664 - accuracy: 0.9089 - val_loss: 0.4954 - val_accuracy: 0.8958 Epoch 50/50 14/14 [==============================] - 8s 553ms/step - loss: 0.3885 - accuracy: 0.8972 - val_loss: 0.9483 - val_accuracy: 0.8646
# Training vs. validation loss per epoch (same figure as before, built in a loop).
plt.xlabel('Epoch Number')
plt.ylabel('Loss')
for key, lbl in (('loss', 'training set'), ('val_loss', 'test set')):
    plt.plot(history.history[key], label=lbl)
plt.legend()
<matplotlib.legend.Legend at 0x7a0d39d31de0>
# Training vs. validation accuracy per epoch (same figure as before, built in a loop).
plt.xlabel('Epoch Number')
plt.ylabel('Accuracy')
for key, lbl in (('accuracy', 'training set'), ('val_accuracy', 'test set')):
    plt.plot(history.history[key], label=lbl)
plt.legend()
<matplotlib.legend.Legend at 0x7a0d39d73160>
model_name = 'hand_sign_recognition_inceptionV3.h5'
model.save(model_name, save_format='h5')
class_map = train_data.class_indices
classes = []
for key in class_map.keys():
classes.append(key)
def predict_image(filename, model, target_size=(228, 228)):
    """Classify one image with *model* and display it with the predicted label.

    Parameters
    ----------
    filename : str
        Path to the image file to classify.
    model : keras.Model
        Trained classifier; its argmax output indexes into the module-level
        ``classes`` list built from the training generator.
    target_size : tuple of int, optional
        (height, width) the image is resized to before inference. Defaults to
        (228, 228), the input shape used throughout this notebook.
    """
    img_ = image.load_img(filename, target_size=target_size)
    # Normalize explicitly on the unbatched array. The original code did
    # `img_processed /= 255.` AFTER np.expand_dims, which mutated img_array
    # through the shared-memory view — plt.imshow only displayed correctly by
    # that aliasing accident. Scaling up front keeps the same values without
    # relying on in-place mutation of a view.
    img_array = image.img_to_array(img_) / 255.
    img_processed = np.expand_dims(img_array, axis=0)  # add batch dimension
    prediction = model.predict(img_processed)
    index = np.argmax(prediction)
    plt.title("Prediction - {}".format(str(classes[index]).title()), size=18, color='red')
    # Floats in [0, 1] are rendered as-is by matplotlib.
    plt.imshow(img_array)
# Spot-check the saved classifier on one held-out test image per selected
# digit class (0, 5, 6, 8, 2). The interleaved "1/1 [=...=]" lines are the
# model.predict progress output captured from the notebook run.
predict_image('/kaggle/input/asl-digits-0-9/ASL Digits/test/0/hand1_0_bot_seg_3_cropped.jpeg', model)
1/1 [==============================] - 2s 2s/step
predict_image('/kaggle/input/asl-digits-0-9/ASL Digits/test/5/hand1_5_bot_seg_2_cropped.jpeg', model)
1/1 [==============================] - 0s 27ms/step
predict_image('/kaggle/input/asl-digits-0-9/ASL Digits/test/6/hand1_6_bot_seg_5_cropped.jpeg', model)
1/1 [==============================] - 0s 29ms/step
predict_image('/kaggle/input/asl-digits-0-9/ASL Digits/test/8/hand1_8_bot_seg_5_cropped.jpeg', model)
1/1 [==============================] - 0s 28ms/step
predict_image('/kaggle/input/asl-digits-0-9/ASL Digits/test/2/hand3_2_dif_seg_3_cropped.jpeg', model)
1/1 [==============================] - 0s 28ms/step